In [1]:
#Import Library 
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import os
import warnings
warnings.filterwarnings('ignore')
In [2]:
df = pd.read_csv("D:\\Projects\\PRJ Car Price Prediction\\audi.csv")
df
Out[2]:
model year price transmission mileage fuelType tax mpg engineSize
0 A1 2017 12500 Manual 15735 Petrol 150 55.4 1.4
1 A6 2016 16500 Automatic 36203 Diesel 20 64.2 2.0
2 A1 2016 11000 Manual 29946 Petrol 30 55.4 1.4
3 A4 2017 16800 Automatic 25952 Diesel 145 67.3 2.0
4 A3 2019 17300 Manual 1998 Petrol 145 49.6 1.0
... ... ... ... ... ... ... ... ... ...
10663 A3 2020 16999 Manual 4018 Petrol 145 49.6 1.0
10664 A3 2020 16999 Manual 1978 Petrol 150 49.6 1.0
10665 A3 2020 17199 Manual 609 Petrol 150 49.6 1.0
10666 Q3 2017 19499 Automatic 8646 Petrol 150 47.9 1.4
10667 Q3 2016 15999 Manual 11855 Petrol 150 47.9 1.4

10668 rows × 9 columns

In [3]:
os.getcwd()
Out[3]:
'C:\\Users\\Lenovo'
In [4]:
df
Out[4]:
model year price transmission mileage fuelType tax mpg engineSize
0 A1 2017 12500 Manual 15735 Petrol 150 55.4 1.4
1 A6 2016 16500 Automatic 36203 Diesel 20 64.2 2.0
2 A1 2016 11000 Manual 29946 Petrol 30 55.4 1.4
3 A4 2017 16800 Automatic 25952 Diesel 145 67.3 2.0
4 A3 2019 17300 Manual 1998 Petrol 145 49.6 1.0
... ... ... ... ... ... ... ... ... ...
10663 A3 2020 16999 Manual 4018 Petrol 145 49.6 1.0
10664 A3 2020 16999 Manual 1978 Petrol 150 49.6 1.0
10665 A3 2020 17199 Manual 609 Petrol 150 49.6 1.0
10666 Q3 2017 19499 Automatic 8646 Petrol 150 47.9 1.4
10667 Q3 2016 15999 Manual 11855 Petrol 150 47.9 1.4

10668 rows × 9 columns

In [5]:
#Pandas Profiling Report 
import pandas_profiling as pf
display(pf.ProfileReport(df))
Summarize dataset:   0%|          | 0/5 [00:00<?, ?it/s]
Generate report structure:   0%|          | 0/1 [00:00<?, ?it/s]
Render HTML:   0%|          | 0/1 [00:00<?, ?it/s]

In [6]:
#Number of records
print (len(df))
10668
In [7]:
#Number of records- Shape
display (df.shape)
(10668, 9)
In [8]:
#Checking the data types
display (df.dtypes )
model            object
year              int64
price             int64
transmission     object
mileage           int64
fuelType         object
tax               int64
mpg             float64
engineSize      float64
dtype: object
In [9]:
#Checking null values
display (df.isna().sum())
model           0
year            0
price           0
transmission    0
mileage         0
fuelType        0
tax             0
mpg             0
engineSize      0
dtype: int64
In [10]:
#Data set details – Info 
print (df.info())
<class 'pandas.core.frame.DataFrame'>
RangeIndex: 10668 entries, 0 to 10667
Data columns (total 9 columns):
 #   Column        Non-Null Count  Dtype  
---  ------        --------------  -----  
 0   model         10668 non-null  object 
 1   year          10668 non-null  int64  
 2   price         10668 non-null  int64  
 3   transmission  10668 non-null  object 
 4   mileage       10668 non-null  int64  
 5   fuelType      10668 non-null  object 
 6   tax           10668 non-null  int64  
 7   mpg           10668 non-null  float64
 8   engineSize    10668 non-null  float64
dtypes: float64(2), int64(4), object(3)
memory usage: 750.2+ KB
None
In [11]:
#Data set details – Describe 
display (df.describe ())
year price mileage tax mpg engineSize
count 10668.000000 10668.000000 10668.000000 10668.000000 10668.000000 10668.000000
mean 2017.100675 22896.685039 24827.244001 126.011436 50.770022 1.930709
std 2.167494 11714.841888 23505.257205 67.170294 12.949782 0.602957
min 1997.000000 1490.000000 1.000000 0.000000 18.900000 0.000000
25% 2016.000000 15130.750000 5968.750000 125.000000 40.900000 1.500000
50% 2017.000000 20200.000000 19000.000000 145.000000 49.600000 2.000000
75% 2019.000000 27990.000000 36464.500000 145.000000 58.900000 2.000000
max 2020.000000 145000.000000 323000.000000 580.000000 188.300000 6.300000
In [12]:
#Create X 
X = df.iloc[:,[0,1,3,4,5,6,7,8]].values
display (X.shape)
display (X)
(10668, 8)
array([[' A1', 2017, 'Manual', ..., 150, 55.4, 1.4],
       [' A6', 2016, 'Automatic', ..., 20, 64.2, 2.0],
       [' A1', 2016, 'Manual', ..., 30, 55.4, 1.4],
       ...,
       [' A3', 2020, 'Manual', ..., 150, 49.6, 1.0],
       [' Q3', 2017, 'Automatic', ..., 150, 47.9, 1.4],
       [' Q3', 2016, 'Manual', ..., 150, 47.9, 1.4]], dtype=object)
In [13]:
#Create Y
Y = df.iloc[:,[2]].values
display (Y.shape)
display (Y)
(10668, 1)
array([[12500],
       [16500],
       [11000],
       ...,
       [17199],
       [19499],
       [15999]], dtype=int64)
In [14]:
#Display Top 5 - X variable 
display(pd.DataFrame(X).head(5))
0 1 2 3 4 5 6 7
0 A1 2017 Manual 15735 Petrol 150 55.4 1.4
1 A6 2016 Automatic 36203 Diesel 20 64.2 2.0
2 A1 2016 Manual 29946 Petrol 30 55.4 1.4
3 A4 2017 Automatic 25952 Diesel 145 67.3 2.0
4 A3 2019 Manual 1998 Petrol 145 49.6 1.0
In [15]:
#Display Top 5 - Y variable 
display(pd.DataFrame(Y).head(5))
0
0 12500
1 16500
2 11000
3 16800
4 17300
In [16]:
#Label Encoding Column – Model and Fuel Type
from sklearn.preprocessing import LabelEncoder
le1 = LabelEncoder()
X[:,0] = le1.fit_transform(X[:,0])
le2 = LabelEncoder()
X[:,-4] = le2.fit_transform(X[:,-4])
display (X)
array([[0, 2017, 'Manual', ..., 150, 55.4, 1.4],
       [5, 2016, 'Automatic', ..., 20, 64.2, 2.0],
       [0, 2016, 'Manual', ..., 30, 55.4, 1.4],
       ...,
       [2, 2020, 'Manual', ..., 150, 49.6, 1.0],
       [9, 2017, 'Automatic', ..., 150, 47.9, 1.4],
       [9, 2016, 'Manual', ..., 150, 47.9, 1.4]], dtype=object)
In [17]:
#One hot Encoding to column – transmission
from sklearn.preprocessing import OneHotEncoder
from sklearn.compose import ColumnTransformer
ct = ColumnTransformer(transformers = [('encoder',OneHotEncoder(),[2])],remainder='passthrough')
X = ct.fit_transform(X)
display (X.shape)
display (pd.DataFrame(X))
(10668, 10)
0 1 2 3 4 5 6 7 8 9
0 0.0 1.0 0.0 0 2017 15735 2 150 55.4 1.4
1 1.0 0.0 0.0 5 2016 36203 0 20 64.2 2.0
2 0.0 1.0 0.0 0 2016 29946 2 30 55.4 1.4
3 1.0 0.0 0.0 3 2017 25952 0 145 67.3 2.0
4 0.0 1.0 0.0 2 2019 1998 2 145 49.6 1.0
... ... ... ... ... ... ... ... ... ... ...
10663 0.0 1.0 0.0 2 2020 4018 2 145 49.6 1.0
10664 0.0 1.0 0.0 2 2020 1978 2 150 49.6 1.0
10665 0.0 1.0 0.0 2 2020 609 2 150 49.6 1.0
10666 1.0 0.0 0.0 9 2017 8646 2 150 47.9 1.4
10667 0.0 1.0 0.0 9 2016 11855 2 150 47.9 1.4

10668 rows × 10 columns

In [18]:
#Display – X 
display (pd.DataFrame(X))
0 1 2 3 4 5 6 7 8 9
0 0.0 1.0 0.0 0 2017 15735 2 150 55.4 1.4
1 1.0 0.0 0.0 5 2016 36203 0 20 64.2 2.0
2 0.0 1.0 0.0 0 2016 29946 2 30 55.4 1.4
3 1.0 0.0 0.0 3 2017 25952 0 145 67.3 2.0
4 0.0 1.0 0.0 2 2019 1998 2 145 49.6 1.0
... ... ... ... ... ... ... ... ... ... ...
10663 0.0 1.0 0.0 2 2020 4018 2 145 49.6 1.0
10664 0.0 1.0 0.0 2 2020 1978 2 150 49.6 1.0
10665 0.0 1.0 0.0 2 2020 609 2 150 49.6 1.0
10666 1.0 0.0 0.0 9 2017 8646 2 150 47.9 1.4
10667 0.0 1.0 0.0 9 2016 11855 2 150 47.9 1.4

10668 rows × 10 columns

In [19]:
#Features Scaling – Standardization 

from sklearn.preprocessing import StandardScaler
sc = StandardScaler()
X = sc.fit_transform(X)
display (pd.DataFrame(X))
0 1 2 3 4 5 6 7 8 9
0 -0.583268 1.200728 -0.712333 -1.123544 -0.046450 -0.386836 1.050783 0.357147 0.357550 -0.880218
1 1.714479 -0.832828 -0.712333 -0.160831 -0.507834 0.483989 -0.954181 -1.578323 1.037130 0.114925
2 -0.583268 1.200728 -0.712333 -1.123544 -0.507834 0.217781 1.050783 -1.429440 0.357550 -0.880218
3 1.714479 -0.832828 -0.712333 -0.545916 -0.046450 0.047853 -0.954181 0.282706 1.276528 0.114925
4 -0.583268 1.200728 -0.712333 -0.738459 0.876318 -0.971285 1.050783 0.282706 -0.090355 -1.543647
... ... ... ... ... ... ... ... ... ... ...
10663 -0.583268 1.200728 -0.712333 -0.738459 1.337702 -0.885343 1.050783 0.282706 -0.090355 -1.543647
10664 -0.583268 1.200728 -0.712333 -0.738459 1.337702 -0.972136 1.050783 0.357147 -0.090355 -1.543647
10665 -0.583268 1.200728 -0.712333 -0.738459 1.337702 -1.030381 1.050783 0.357147 -0.090355 -1.543647
10666 1.714479 -0.832828 -0.712333 0.609339 -0.046450 -0.688442 1.050783 0.357147 -0.221637 -0.880218
10667 -0.583268 1.200728 -0.712333 0.609339 -0.507834 -0.551913 1.050783 0.357147 -0.221637 -0.880218

10668 rows × 10 columns

In [20]:
#Train Test Split 
from sklearn.model_selection import train_test_split
(X_train,X_test,Y_train,Y_test) = train_test_split(X,Y,test_size=0.2,random_state=0)
print (X.shape, Y.shape)
print (X_train.shape, Y_train.shape)
print (X_test.shape, Y_test.shape)
(10668, 10) (10668, 1)
(8534, 10) (8534, 1)
(2134, 10) (2134, 1)
In [21]:
#Create Random Forest Regressor 

from sklearn.ensemble import RandomForestRegressor
regression = RandomForestRegressor(random_state=0)
regression.fit(X_train,Y_train)
display (regression)
RandomForestRegressor(random_state=0)
In a Jupyter environment, please rerun this cell to show the HTML representation or trust the notebook.
On GitHub, the HTML representation is unable to render, please try loading this page with nbviewer.org.
RandomForestRegressor(random_state=0)
In [22]:
#Prediction with Test Data 
y_pred = regression.predict(X_test)
display (y_pred)
array([14337.15, 23450.35, 27330.07, ..., 46275.18, 31359.  ,  9929.62])
In [23]:
#Display Predicted and actual Values resp. 

print(np.concatenate((y_pred.reshape(len(y_pred),1),Y_test.reshape(len(Y_test),1)),1))
[[14337.15 14998.  ]
 [23450.35 21950.  ]
 [27330.07 28990.  ]
 ...
 [46275.18 45995.  ]
 [31359.   30500.  ]
 [ 9929.62  8400.  ]]
In [24]:
#Display – Accuracy and Mean Absolute Error 

from sklearn.metrics import r2_score,mean_absolute_error
print  ('R2 Score ', r2_score(Y_test, y_pred))
print  ('Mean Absolute Error', mean_absolute_error(Y_test,y_pred))
R2 Score  0.9536134841307546
Mean Absolute Error 1538.730980670462
In [25]:
#Create a Linear Regression Model 

from sklearn.linear_model import LinearRegression
reg = LinearRegression()
reg.fit(X_train,Y_train)
print(reg)
LinearRegression()
In [26]:
#Prediction with Test Data 
y_pred = reg.predict(X_test)
display (y_pred)
array([[13095.55234668],
       [29380.55234668],
       [31837.55234668],
       ...,
       [42652.55234668],
       [31554.55234668],
       [ 7329.55234668]])
In [27]:
#Display predicted and actual Values 

print(np.concatenate((y_pred.reshape(len(y_pred),1),Y_test.reshape(len(Y_test),1)),1))
[[13095.55234668 14998.        ]
 [29380.55234668 21950.        ]
 [31837.55234668 28990.        ]
 ...
 [42652.55234668 45995.        ]
 [31554.55234668 30500.        ]
 [ 7329.55234668  8400.        ]]
In [28]:
#Display – Accuracy and Mean Absolute Error 

from sklearn.metrics import r2_score,mean_absolute_error
print  ('R2 Score ', r2_score(Y_test, y_pred))
print  ('Mean Absolute Error', mean_absolute_error(Y_test,y_pred))
R2 Score  0.7915568279145352
Mean Absolute Error 3379.644179345178
In [29]:
#Prediction for complete data set 
y_pred = reg.predict(X)
display (y_pred)
array([[14660.55234668],
       [20566.55234668],
       [13842.55234668],
       ...,
       [19436.55234668],
       [20924.55234668],
       [16720.55234668]])
In [30]:
#Display the Actual and predicted data  

result = pd.concat([df,pd.DataFrame(y_pred)],axis=1)
display( result)
model year price transmission mileage fuelType tax mpg engineSize 0
0 A1 2017 12500 Manual 15735 Petrol 150 55.4 1.4 14660.552347
1 A6 2016 16500 Automatic 36203 Diesel 20 64.2 2.0 20566.552347
2 A1 2016 11000 Manual 29946 Petrol 30 55.4 1.4 13842.552347
3 A4 2017 16800 Automatic 25952 Diesel 145 67.3 2.0 19843.552347
4 A3 2019 17300 Manual 1998 Petrol 145 49.6 1.0 17379.552347
... ... ... ... ... ... ... ... ... ... ...
10663 A3 2020 16999 Manual 4018 Petrol 145 49.6 1.0 19219.552347
10664 A3 2020 16999 Manual 1978 Petrol 150 49.6 1.0 19306.552347
10665 A3 2020 17199 Manual 609 Petrol 150 49.6 1.0 19436.552347
10666 Q3 2017 19499 Automatic 8646 Petrol 150 47.9 1.4 20924.552347
10667 Q3 2016 15999 Manual 11855 Petrol 150 47.9 1.4 16720.552347

10668 rows × 10 columns

In [31]:
#Create Model Extra Tree Regressor 
from sklearn.ensemble import  ExtraTreesRegressor
ET_Model=ExtraTreesRegressor(n_estimators = 120)
ET_Model.fit(X_train,Y_train)
y_predict=ET_Model.predict(X_test)
from sklearn.metrics import r2_score,mean_absolute_error
print  ('R2 Score ', r2_score(Y_test, y_predict))
print  ('Mean Absolute Error', mean_absolute_error(Y_test,y_predict))
R2 Score  0.9570765070738761
Mean Absolute Error 1533.2059619389775
In [32]:
#Display the Result 
y_pred = reg.predict(X)
display (y_pred)
result = pd.concat([df,pd.DataFrame(y_pred)],axis=1)
display( result)
array([[14660.55234668],
       [20566.55234668],
       [13842.55234668],
       ...,
       [19436.55234668],
       [20924.55234668],
       [16720.55234668]])
model year price transmission mileage fuelType tax mpg engineSize 0
0 A1 2017 12500 Manual 15735 Petrol 150 55.4 1.4 14660.552347
1 A6 2016 16500 Automatic 36203 Diesel 20 64.2 2.0 20566.552347
2 A1 2016 11000 Manual 29946 Petrol 30 55.4 1.4 13842.552347
3 A4 2017 16800 Automatic 25952 Diesel 145 67.3 2.0 19843.552347
4 A3 2019 17300 Manual 1998 Petrol 145 49.6 1.0 17379.552347
... ... ... ... ... ... ... ... ... ... ...
10663 A3 2020 16999 Manual 4018 Petrol 145 49.6 1.0 19219.552347
10664 A3 2020 16999 Manual 1978 Petrol 150 49.6 1.0 19306.552347
10665 A3 2020 17199 Manual 609 Petrol 150 49.6 1.0 19436.552347
10666 Q3 2017 19499 Automatic 8646 Petrol 150 47.9 1.4 20924.552347
10667 Q3 2016 15999 Manual 11855 Petrol 150 47.9 1.4 16720.552347

10668 rows × 10 columns

In [33]:
#RandomizedSearchCV
# Hyperparameter Tuning and RandomizedSearchCV - Model used – RandomForestRegressor

from sklearn.model_selection import RandomizedSearchCV
n_estimators = [int(x) for x in np.linspace(start = 80, stop = 1500, num = 10)]
max_features = ['auto', 'sqrt']
max_depth = [int(x) for x in np.linspace(6, 45, num = 5)]
min_samples_split = [2, 5, 10, 15, 100]
min_samples_leaf = [1, 2, 5, 10]

# create random grid

rand_grid={'n_estimators': n_estimators,
               'max_features': max_features,
               'max_depth': max_depth,
               'min_samples_split': min_samples_split,
               'min_samples_leaf': min_samples_leaf}

rf=RandomForestRegressor()

rCV=RandomizedSearchCV(estimator=rf,param_distributions=rand_grid,scoring='neg_mean_squared_error',n_iter=3,cv=3,random_state=42, n_jobs = 1)
In [34]:
#Fit Model

display (rCV.fit(X_train,Y_train))
RandomizedSearchCV(cv=3, estimator=RandomForestRegressor(), n_iter=3, n_jobs=1,
                   param_distributions={'max_depth': [6, 15, 25, 35, 45],
                                        'max_features': ['auto', 'sqrt'],
                                        'min_samples_leaf': [1, 2, 5, 10],
                                        'min_samples_split': [2, 5, 10, 15,
                                                              100],
                                        'n_estimators': [80, 237, 395, 553, 711,
                                                         868, 1026, 1184, 1342,
                                                         1500]},
                   random_state=42, scoring='neg_mean_squared_error')
In a Jupyter environment, please rerun this cell to show the HTML representation or trust the notebook.
On GitHub, the HTML representation is unable to render, please try loading this page with nbviewer.org.
RandomizedSearchCV(cv=3, estimator=RandomForestRegressor(), n_iter=3, n_jobs=1,
                   param_distributions={'max_depth': [6, 15, 25, 35, 45],
                                        'max_features': ['auto', 'sqrt'],
                                        'min_samples_leaf': [1, 2, 5, 10],
                                        'min_samples_split': [2, 5, 10, 15,
                                                              100],
                                        'n_estimators': [80, 237, 395, 553, 711,
                                                         868, 1026, 1184, 1342,
                                                         1500]},
                   random_state=42, scoring='neg_mean_squared_error')
RandomForestRegressor()
RandomForestRegressor()
In [35]:
#Prediction 
rf_pred=rCV.predict(X_test)
In [36]:
#Mean_absolute_error and mean_squared_error

from sklearn.metrics import mean_absolute_error,mean_squared_error
print('MAE',mean_absolute_error(Y_test,rf_pred))
print('MSE',mean_squared_error(Y_test,rf_pred))
MAE 1504.3805344295797
MSE 5677323.888543681
In [37]:
#Display Accuracy
display (r2_score(Y_test,rf_pred))
0.9586851357630621
In [38]:
#Model CatBoostRegressor
from catboost import CatBoostRegressor
cat=CatBoostRegressor()
print (cat.fit(X_train,Y_train))
Learning rate set to 0.057452
0:	learn: 11184.0694779	total: 150ms	remaining: 2m 29s
1:	learn: 10690.2138498	total: 161ms	remaining: 1m 20s
2:	learn: 10221.1011877	total: 170ms	remaining: 56.6s
3:	learn: 9778.8067263	total: 180ms	remaining: 44.8s
4:	learn: 9382.6454054	total: 190ms	remaining: 37.8s
5:	learn: 8989.9290587	total: 198ms	remaining: 32.8s
6:	learn: 8628.4892653	total: 207ms	remaining: 29.3s
7:	learn: 8280.9179062	total: 216ms	remaining: 26.7s
8:	learn: 7950.5476053	total: 224ms	remaining: 24.7s
9:	learn: 7636.7735365	total: 230ms	remaining: 22.8s
10:	learn: 7349.6084699	total: 234ms	remaining: 21.1s
11:	learn: 7084.0457263	total: 239ms	remaining: 19.6s
12:	learn: 6842.7060927	total: 243ms	remaining: 18.4s
13:	learn: 6608.0244743	total: 247ms	remaining: 17.4s
14:	learn: 6387.7285726	total: 251ms	remaining: 16.5s
15:	learn: 6170.1354894	total: 255ms	remaining: 15.7s
16:	learn: 5973.9133066	total: 260ms	remaining: 15s
17:	learn: 5780.4713044	total: 264ms	remaining: 14.4s
18:	learn: 5611.7289090	total: 267ms	remaining: 13.8s
19:	learn: 5442.3513779	total: 272ms	remaining: 13.3s
20:	learn: 5286.7176367	total: 277ms	remaining: 12.9s
21:	learn: 5137.6512896	total: 280ms	remaining: 12.5s
22:	learn: 4990.7923670	total: 284ms	remaining: 12.1s
23:	learn: 4861.3009833	total: 289ms	remaining: 11.7s
24:	learn: 4739.6194386	total: 293ms	remaining: 11.4s
25:	learn: 4618.7487999	total: 297ms	remaining: 11.1s
26:	learn: 4506.4548775	total: 301ms	remaining: 10.8s
27:	learn: 4401.2065904	total: 305ms	remaining: 10.6s
28:	learn: 4305.6257343	total: 310ms	remaining: 10.4s
29:	learn: 4207.0507760	total: 314ms	remaining: 10.1s
30:	learn: 4124.0046256	total: 318ms	remaining: 9.95s
31:	learn: 4040.1936249	total: 323ms	remaining: 9.77s
32:	learn: 3968.0976460	total: 327ms	remaining: 9.57s
33:	learn: 3896.9106971	total: 331ms	remaining: 9.41s
34:	learn: 3827.6695910	total: 336ms	remaining: 9.26s
35:	learn: 3764.4378792	total: 340ms	remaining: 9.1s
36:	learn: 3703.8876617	total: 344ms	remaining: 8.95s
37:	learn: 3642.1855579	total: 347ms	remaining: 8.8s
38:	learn: 3593.4131916	total: 353ms	remaining: 8.7s
39:	learn: 3541.6799962	total: 358ms	remaining: 8.6s
40:	learn: 3493.4081900	total: 364ms	remaining: 8.5s
41:	learn: 3453.4874803	total: 370ms	remaining: 8.43s
42:	learn: 3410.5308321	total: 374ms	remaining: 8.31s
43:	learn: 3370.6809022	total: 378ms	remaining: 8.21s
44:	learn: 3333.4757971	total: 381ms	remaining: 8.1s
45:	learn: 3296.7604576	total: 386ms	remaining: 8s
46:	learn: 3264.0377013	total: 389ms	remaining: 7.89s
47:	learn: 3232.4741430	total: 394ms	remaining: 7.8s
48:	learn: 3196.7546158	total: 397ms	remaining: 7.71s
49:	learn: 3169.5785953	total: 401ms	remaining: 7.63s
50:	learn: 3145.2595753	total: 405ms	remaining: 7.54s
51:	learn: 3121.9559750	total: 411ms	remaining: 7.5s
52:	learn: 3103.9335304	total: 417ms	remaining: 7.44s
53:	learn: 3081.5474184	total: 422ms	remaining: 7.38s
54:	learn: 3058.2078780	total: 429ms	remaining: 7.37s
55:	learn: 3040.6918309	total: 433ms	remaining: 7.3s
56:	learn: 3023.6582508	total: 437ms	remaining: 7.23s
57:	learn: 3007.3255135	total: 441ms	remaining: 7.16s
58:	learn: 2990.4811558	total: 445ms	remaining: 7.09s
59:	learn: 2972.3283312	total: 449ms	remaining: 7.03s
60:	learn: 2959.7431445	total: 453ms	remaining: 6.97s
61:	learn: 2946.5923636	total: 457ms	remaining: 6.91s
62:	learn: 2934.9869357	total: 461ms	remaining: 6.86s
63:	learn: 2920.6625964	total: 465ms	remaining: 6.8s
64:	learn: 2905.9913604	total: 469ms	remaining: 6.74s
65:	learn: 2890.8150557	total: 473ms	remaining: 6.69s
66:	learn: 2881.8105155	total: 478ms	remaining: 6.65s
67:	learn: 2869.9838674	total: 481ms	remaining: 6.6s
68:	learn: 2856.9685778	total: 485ms	remaining: 6.55s
69:	learn: 2847.6677815	total: 490ms	remaining: 6.5s
70:	learn: 2838.3074544	total: 493ms	remaining: 6.46s
71:	learn: 2831.5916477	total: 498ms	remaining: 6.42s
72:	learn: 2823.7402368	total: 503ms	remaining: 6.38s
73:	learn: 2817.6964617	total: 506ms	remaining: 6.34s
74:	learn: 2806.9046704	total: 511ms	remaining: 6.3s
75:	learn: 2799.2999793	total: 514ms	remaining: 6.25s
76:	learn: 2792.2531112	total: 518ms	remaining: 6.21s
77:	learn: 2786.0539054	total: 522ms	remaining: 6.17s
78:	learn: 2778.3667928	total: 526ms	remaining: 6.13s
79:	learn: 2770.9670854	total: 530ms	remaining: 6.09s
80:	learn: 2765.4085686	total: 534ms	remaining: 6.06s
81:	learn: 2758.2673921	total: 539ms	remaining: 6.03s
82:	learn: 2752.2341491	total: 543ms	remaining: 6s
83:	learn: 2746.5786616	total: 547ms	remaining: 5.96s
84:	learn: 2734.6379923	total: 551ms	remaining: 5.93s
85:	learn: 2730.6071445	total: 556ms	remaining: 5.9s
86:	learn: 2724.7246149	total: 561ms	remaining: 5.89s
87:	learn: 2721.0052457	total: 567ms	remaining: 5.87s
88:	learn: 2712.3204256	total: 572ms	remaining: 5.86s
89:	learn: 2706.8884195	total: 577ms	remaining: 5.83s
90:	learn: 2700.6465881	total: 583ms	remaining: 5.83s
91:	learn: 2695.4837286	total: 588ms	remaining: 5.8s
92:	learn: 2684.0451492	total: 592ms	remaining: 5.77s
93:	learn: 2679.3709509	total: 595ms	remaining: 5.74s
94:	learn: 2671.3324958	total: 599ms	remaining: 5.71s
95:	learn: 2668.2768042	total: 603ms	remaining: 5.68s
96:	learn: 2664.6017210	total: 607ms	remaining: 5.65s
97:	learn: 2661.1200451	total: 611ms	remaining: 5.62s
98:	learn: 2658.3753198	total: 615ms	remaining: 5.59s
99:	learn: 2650.2996235	total: 621ms	remaining: 5.59s
100:	learn: 2648.1326775	total: 626ms	remaining: 5.57s
101:	learn: 2645.6487331	total: 631ms	remaining: 5.56s
102:	learn: 2642.4443354	total: 636ms	remaining: 5.54s
103:	learn: 2640.1216511	total: 643ms	remaining: 5.54s
104:	learn: 2636.1391523	total: 647ms	remaining: 5.51s
105:	learn: 2632.4110278	total: 651ms	remaining: 5.49s
106:	learn: 2624.0779999	total: 655ms	remaining: 5.47s
107:	learn: 2618.9404235	total: 659ms	remaining: 5.45s
108:	learn: 2617.2460690	total: 664ms	remaining: 5.43s
109:	learn: 2612.7806768	total: 669ms	remaining: 5.41s
110:	learn: 2607.2897650	total: 672ms	remaining: 5.38s
111:	learn: 2601.5187968	total: 676ms	remaining: 5.36s
112:	learn: 2599.0174087	total: 680ms	remaining: 5.34s
113:	learn: 2595.7902363	total: 684ms	remaining: 5.32s
114:	learn: 2589.7462674	total: 688ms	remaining: 5.29s
115:	learn: 2584.1747358	total: 692ms	remaining: 5.28s
116:	learn: 2581.5784516	total: 697ms	remaining: 5.26s
117:	learn: 2573.1943706	total: 701ms	remaining: 5.24s
118:	learn: 2571.2467133	total: 705ms	remaining: 5.22s
119:	learn: 2567.9967348	total: 709ms	remaining: 5.2s
120:	learn: 2561.8072039	total: 713ms	remaining: 5.18s
121:	learn: 2557.7552506	total: 717ms	remaining: 5.16s
122:	learn: 2554.4333020	total: 721ms	remaining: 5.14s
123:	learn: 2552.8292311	total: 725ms	remaining: 5.12s
124:	learn: 2548.1769382	total: 729ms	remaining: 5.1s
125:	learn: 2547.0079747	total: 732ms	remaining: 5.08s
126:	learn: 2545.3790353	total: 736ms	remaining: 5.06s
127:	learn: 2539.6944398	total: 740ms	remaining: 5.04s
128:	learn: 2534.5374763	total: 744ms	remaining: 5.02s
129:	learn: 2529.7280897	total: 748ms	remaining: 5s
130:	learn: 2527.4098548	total: 752ms	remaining: 4.99s
131:	learn: 2525.7165491	total: 755ms	remaining: 4.97s
132:	learn: 2523.7838455	total: 759ms	remaining: 4.95s
133:	learn: 2520.3784007	total: 763ms	remaining: 4.93s
134:	learn: 2517.5840823	total: 768ms	remaining: 4.92s
135:	learn: 2513.1034937	total: 774ms	remaining: 4.92s
136:	learn: 2509.4022766	total: 778ms	remaining: 4.9s
137:	learn: 2505.3916480	total: 783ms	remaining: 4.89s
138:	learn: 2503.7477850	total: 788ms	remaining: 4.88s
139:	learn: 2502.2292847	total: 795ms	remaining: 4.88s
140:	learn: 2496.4265667	total: 799ms	remaining: 4.87s
141:	learn: 2493.7826580	total: 804ms	remaining: 4.86s
142:	learn: 2491.3225450	total: 809ms	remaining: 4.84s
143:	learn: 2487.8639855	total: 814ms	remaining: 4.84s
144:	learn: 2486.1786714	total: 819ms	remaining: 4.83s
145:	learn: 2484.6798215	total: 825ms	remaining: 4.82s
146:	learn: 2481.9012350	total: 833ms	remaining: 4.83s
147:	learn: 2478.9312260	total: 838ms	remaining: 4.82s
148:	learn: 2476.5372412	total: 843ms	remaining: 4.81s
149:	learn: 2473.2546223	total: 848ms	remaining: 4.8s
150:	learn: 2471.4471464	total: 853ms	remaining: 4.79s
151:	learn: 2469.6409715	total: 860ms	remaining: 4.79s
152:	learn: 2466.9193203	total: 864ms	remaining: 4.78s
153:	learn: 2465.1844902	total: 869ms	remaining: 4.78s
154:	learn: 2463.0499117	total: 874ms	remaining: 4.76s
155:	learn: 2461.7827140	total: 878ms	remaining: 4.75s
156:	learn: 2458.9977863	total: 883ms	remaining: 4.74s
157:	learn: 2454.9587035	total: 889ms	remaining: 4.74s
158:	learn: 2451.9873804	total: 894ms	remaining: 4.73s
159:	learn: 2450.8561954	total: 900ms	remaining: 4.72s
160:	learn: 2449.7476635	total: 904ms	remaining: 4.71s
161:	learn: 2446.4227836	total: 909ms	remaining: 4.7s
162:	learn: 2444.2555123	total: 914ms	remaining: 4.69s
163:	learn: 2441.5688551	total: 920ms	remaining: 4.69s
164:	learn: 2440.2986221	total: 925ms	remaining: 4.68s
165:	learn: 2437.3232663	total: 931ms	remaining: 4.68s
166:	learn: 2432.4649721	total: 936ms	remaining: 4.67s
167:	learn: 2428.3970878	total: 941ms	remaining: 4.66s
168:	learn: 2423.6253903	total: 945ms	remaining: 4.65s
169:	learn: 2421.4159298	total: 950ms	remaining: 4.64s
170:	learn: 2420.5105888	total: 955ms	remaining: 4.63s
171:	learn: 2419.1046943	total: 959ms	remaining: 4.62s
172:	learn: 2415.9697367	total: 965ms	remaining: 4.61s
173:	learn: 2414.6048787	total: 970ms	remaining: 4.61s
174:	learn: 2413.5476062	total: 975ms	remaining: 4.6s
175:	learn: 2410.0292711	total: 981ms	remaining: 4.59s
176:	learn: 2409.2012840	total: 985ms	remaining: 4.58s
177:	learn: 2404.9199084	total: 992ms	remaining: 4.58s
178:	learn: 2403.6642991	total: 996ms	remaining: 4.57s
179:	learn: 2399.8983815	total: 1s	remaining: 4.56s
180:	learn: 2395.0973851	total: 1.01s	remaining: 4.55s
181:	learn: 2393.7417885	total: 1.01s	remaining: 4.54s
182:	learn: 2391.4422590	total: 1.01s	remaining: 4.53s
183:	learn: 2388.2745020	total: 1.02s	remaining: 4.52s
184:	learn: 2385.2843416	total: 1.02s	remaining: 4.51s
185:	learn: 2384.0606488	total: 1.03s	remaining: 4.5s
186:	learn: 2381.7253049	total: 1.03s	remaining: 4.5s
187:	learn: 2380.6296772	total: 1.04s	remaining: 4.49s
188:	learn: 2377.6691116	total: 1.05s	remaining: 4.49s
189:	learn: 2376.6098548	total: 1.05s	remaining: 4.5s
190:	learn: 2373.3608150	total: 1.06s	remaining: 4.49s
191:	learn: 2371.9139000	total: 1.06s	remaining: 4.48s
192:	learn: 2369.5546632	total: 1.07s	remaining: 4.47s
193:	learn: 2366.3459170	total: 1.07s	remaining: 4.46s
194:	learn: 2362.9178905	total: 1.08s	remaining: 4.46s
195:	learn: 2360.1859250	total: 1.08s	remaining: 4.45s
196:	learn: 2359.2590139	total: 1.09s	remaining: 4.44s
197:	learn: 2357.0284087	total: 1.09s	remaining: 4.43s
198:	learn: 2353.8146557	total: 1.1s	remaining: 4.42s
199:	learn: 2353.1600869	total: 1.1s	remaining: 4.42s
200:	learn: 2350.2201701	total: 1.11s	remaining: 4.41s
201:	learn: 2347.3400692	total: 1.11s	remaining: 4.4s
202:	learn: 2345.5937801	total: 1.12s	remaining: 4.39s
203:	learn: 2344.6418606	total: 1.12s	remaining: 4.38s
204:	learn: 2343.8773572	total: 1.13s	remaining: 4.37s
205:	learn: 2342.6759718	total: 1.13s	remaining: 4.36s
206:	learn: 2339.8990721	total: 1.14s	remaining: 4.36s
207:	learn: 2338.2296743	total: 1.14s	remaining: 4.35s
208:	learn: 2336.5341093	total: 1.15s	remaining: 4.34s
209:	learn: 2335.3146229	total: 1.15s	remaining: 4.33s
210:	learn: 2333.2988328	total: 1.16s	remaining: 4.32s
211:	learn: 2331.4510136	total: 1.16s	remaining: 4.32s
212:	learn: 2330.1170916	total: 1.17s	remaining: 4.31s
213:	learn: 2329.1335953	total: 1.17s	remaining: 4.3s
214:	learn: 2324.7341247	total: 1.18s	remaining: 4.3s
215:	learn: 2323.8905796	total: 1.18s	remaining: 4.29s
216:	learn: 2322.9088141	total: 1.19s	remaining: 4.29s
217:	learn: 2321.4407108	total: 1.19s	remaining: 4.28s
218:	learn: 2319.8959026	total: 1.2s	remaining: 4.27s
219:	learn: 2317.2280690	total: 1.2s	remaining: 4.27s
220:	learn: 2316.4073503	total: 1.21s	remaining: 4.26s
221:	learn: 2315.9774014	total: 1.21s	remaining: 4.25s
222:	learn: 2313.4353035	total: 1.22s	remaining: 4.24s
223:	learn: 2309.5002690	total: 1.22s	remaining: 4.24s
224:	learn: 2308.6886023	total: 1.23s	remaining: 4.23s
225:	learn: 2304.9689724	total: 1.23s	remaining: 4.22s
226:	learn: 2303.2677276	total: 1.24s	remaining: 4.22s
227:	learn: 2302.0813332	total: 1.24s	remaining: 4.21s
228:	learn: 2299.7896009	total: 1.25s	remaining: 4.21s
229:	learn: 2298.6975801	total: 1.25s	remaining: 4.2s
230:	learn: 2295.4513799	total: 1.26s	remaining: 4.19s
231:	learn: 2294.8327851	total: 1.26s	remaining: 4.18s
232:	learn: 2292.7337022	total: 1.27s	remaining: 4.17s
233:	learn: 2291.3686155	total: 1.27s	remaining: 4.16s
234:	learn: 2289.8722686	total: 1.27s	remaining: 4.15s
235:	learn: 2287.1903095	total: 1.28s	remaining: 4.14s
236:	learn: 2285.1795624	total: 1.28s	remaining: 4.13s
237:	learn: 2284.5492745	total: 1.28s	remaining: 4.12s
238:	learn: 2284.0598710	total: 1.29s	remaining: 4.1s
239:	learn: 2280.8355280	total: 1.29s	remaining: 4.09s
240:	learn: 2277.6796816	total: 1.3s	remaining: 4.08s
241:	learn: 2276.4744729	total: 1.3s	remaining: 4.07s
242:	learn: 2274.1066185	total: 1.3s	remaining: 4.06s
243:	learn: 2273.2995006	total: 1.31s	remaining: 4.05s
244:	learn: 2271.2913458	total: 1.31s	remaining: 4.04s
245:	learn: 2267.8707432	total: 1.32s	remaining: 4.04s
246:	learn: 2266.7160845	total: 1.32s	remaining: 4.04s
247:	learn: 2265.3929145	total: 1.33s	remaining: 4.04s
248:	learn: 2262.4179426	total: 1.33s	remaining: 4.03s
249:	learn: 2261.1526174	total: 1.34s	remaining: 4.01s
250:	learn: 2259.8092162	total: 1.35s	remaining: 4.02s
251:	learn: 2258.6097665	total: 1.35s	remaining: 4.01s
252:	learn: 2258.0384687	total: 1.35s	remaining: 4s
253:	learn: 2256.7364390	total: 1.36s	remaining: 3.99s
254:	learn: 2254.4523140	total: 1.36s	remaining: 3.98s
255:	learn: 2252.2397492	total: 1.37s	remaining: 3.99s
256:	learn: 2251.4537345	total: 1.38s	remaining: 3.98s
257:	learn: 2250.3081272	total: 1.38s	remaining: 3.97s
258:	learn: 2249.3424881	total: 1.39s	remaining: 3.97s
259:	learn: 2248.7847665	total: 1.39s	remaining: 3.96s
260:	learn: 2246.8981136	total: 1.4s	remaining: 3.96s
261:	learn: 2244.2123674	total: 1.41s	remaining: 3.96s
262:	learn: 2242.6369237	total: 1.41s	remaining: 3.95s
263:	learn: 2241.6767611	total: 1.42s	remaining: 3.96s
264:	learn: 2241.2817960	total: 1.42s	remaining: 3.95s
265:	learn: 2238.2773409	total: 1.43s	remaining: 3.94s
266:	learn: 2236.9083606	total: 1.44s	remaining: 3.94s
267:	learn: 2236.5013125	total: 1.44s	remaining: 3.94s
268:	learn: 2235.3727747	total: 1.45s	remaining: 3.94s
269:	learn: 2233.4085769	total: 1.46s	remaining: 3.93s
270:	learn: 2231.8458786	total: 1.46s	remaining: 3.92s
271:	learn: 2231.2707133	total: 1.47s	remaining: 3.92s
272:	learn: 2230.1912489	total: 1.47s	remaining: 3.92s
273:	learn: 2227.2714177	total: 1.47s	remaining: 3.91s
274:	learn: 2226.7498899	total: 1.48s	remaining: 3.91s
275:	learn: 2226.3826174	total: 1.49s	remaining: 3.9s
276:	learn: 2225.1456520	total: 1.49s	remaining: 3.89s
277:	learn: 2224.6893392	total: 1.5s	remaining: 3.88s
278:	learn: 2222.7238604	total: 1.5s	remaining: 3.88s
279:	learn: 2220.9794437	total: 1.5s	remaining: 3.87s
280:	learn: 2219.2772746	total: 1.51s	remaining: 3.86s
281:	learn: 2217.4638789	total: 1.51s	remaining: 3.85s
282:	learn: 2215.2710212	total: 1.52s	remaining: 3.85s
283:	learn: 2213.8374478	total: 1.52s	remaining: 3.84s
284:	learn: 2213.4277674	total: 1.52s	remaining: 3.83s
285:	learn: 2211.3747549	total: 1.53s	remaining: 3.82s
286:	learn: 2210.9939500	total: 1.53s	remaining: 3.81s
287:	learn: 2210.4672871	total: 1.54s	remaining: 3.8s
288:	learn: 2208.0419719	total: 1.54s	remaining: 3.79s
289:	learn: 2205.6687214	total: 1.54s	remaining: 3.78s
290:	learn: 2204.0098133	total: 1.55s	remaining: 3.77s
291:	learn: 2203.6478036	total: 1.55s	remaining: 3.77s
292:	learn: 2202.4326835	total: 1.56s	remaining: 3.76s
293:	learn: 2200.6478481	total: 1.56s	remaining: 3.75s
294:	learn: 2199.3330273	total: 1.57s	remaining: 3.75s
295:	learn: 2198.1228945	total: 1.57s	remaining: 3.74s
296:	learn: 2195.7625772	total: 1.58s	remaining: 3.73s
297:	learn: 2195.3184198	total: 1.58s	remaining: 3.73s
298:	learn: 2195.0797540	total: 1.59s	remaining: 3.72s
299:	learn: 2194.0416372	total: 1.59s	remaining: 3.71s
300:	learn: 2193.2579818	total: 1.59s	remaining: 3.7s
301:	learn: 2192.0421561	total: 1.6s	remaining: 3.69s
302:	learn: 2189.8916533	total: 1.6s	remaining: 3.69s
303:	learn: 2187.6162775	total: 1.61s	remaining: 3.68s
304:	learn: 2186.5523068	total: 1.61s	remaining: 3.67s
305:	learn: 2185.0648494	total: 1.62s	remaining: 3.67s
306:	learn: 2183.0338672	total: 1.62s	remaining: 3.66s
307:	learn: 2182.3582529	total: 1.63s	remaining: 3.65s
308:	learn: 2181.2600131	total: 1.63s	remaining: 3.65s
309:	learn: 2179.2779872	total: 1.64s	remaining: 3.64s
310:	learn: 2177.3968483	total: 1.64s	remaining: 3.64s
311:	learn: 2175.6936531	total: 1.65s	remaining: 3.63s
312:	learn: 2175.1346380	total: 1.65s	remaining: 3.62s
313:	learn: 2174.3948443	total: 1.65s	remaining: 3.62s
314:	learn: 2174.0686101	total: 1.66s	remaining: 3.61s
315:	learn: 2173.5648035	total: 1.66s	remaining: 3.6s
316:	learn: 2172.1935105	total: 1.67s	remaining: 3.59s
317:	learn: 2170.5700401	total: 1.67s	remaining: 3.58s
318:	learn: 2168.7470746	total: 1.67s	remaining: 3.57s
319:	learn: 2167.5437761	total: 1.68s	remaining: 3.57s
320:	learn: 2165.5790789	total: 1.68s	remaining: 3.56s
321:	learn: 2164.2609405	total: 1.69s	remaining: 3.55s
322:	learn: 2162.5119577	total: 1.69s	remaining: 3.54s
323:	learn: 2161.4811771	total: 1.69s	remaining: 3.53s
324:	learn: 2160.5458029	total: 1.7s	remaining: 3.53s
325:	learn: 2158.9245109	total: 1.7s	remaining: 3.52s
326:	learn: 2157.0096074	total: 1.71s	remaining: 3.51s
327:	learn: 2156.0953247	total: 1.71s	remaining: 3.5s
328:	learn: 2155.2997990	total: 1.71s	remaining: 3.49s
329:	learn: 2154.0676384	total: 1.72s	remaining: 3.49s
330:	learn: 2152.1472733	total: 1.72s	remaining: 3.48s
331:	learn: 2151.6773078	total: 1.73s	remaining: 3.47s
332:	learn: 2151.1380063	total: 1.73s	remaining: 3.46s
333:	learn: 2149.9980622	total: 1.73s	remaining: 3.46s
334:	learn: 2148.6561715	total: 1.74s	remaining: 3.45s
335:	learn: 2147.5979814	total: 1.74s	remaining: 3.44s
336:	learn: 2146.4366203	total: 1.74s	remaining: 3.43s
337:	learn: 2145.3221621	total: 1.75s	remaining: 3.42s
338:	learn: 2144.1606578	total: 1.75s	remaining: 3.42s
339:	learn: 2143.4114760	total: 1.76s	remaining: 3.41s
340:	learn: 2141.9134923	total: 1.76s	remaining: 3.41s
341:	learn: 2141.5027453	total: 1.77s	remaining: 3.4s
342:	learn: 2140.5973868	total: 1.77s	remaining: 3.4s
343:	learn: 2140.1711968	total: 1.78s	remaining: 3.39s
344:	learn: 2139.1309802	total: 1.78s	remaining: 3.38s
345:	learn: 2138.1651205	total: 1.79s	remaining: 3.38s
346:	learn: 2136.6793748	total: 1.79s	remaining: 3.37s
347:	learn: 2134.9271288	total: 1.8s	remaining: 3.37s
348:	learn: 2133.4990990	total: 1.8s	remaining: 3.36s
349:	learn: 2132.9579709	total: 1.8s	remaining: 3.35s
350:	learn: 2131.3568877	total: 1.81s	remaining: 3.34s
351:	learn: 2130.0934606	total: 1.81s	remaining: 3.34s
352:	learn: 2129.1291428	total: 1.82s	remaining: 3.33s
353:	learn: 2127.2119683	total: 1.82s	remaining: 3.33s
354:	learn: 2126.2466287	total: 1.83s	remaining: 3.32s
355:	learn: 2125.9083128	total: 1.83s	remaining: 3.31s
356:	learn: 2125.6451110	total: 1.84s	remaining: 3.31s
357:	learn: 2123.9260800	total: 1.84s	remaining: 3.3s
358:	learn: 2122.2698937	total: 1.84s	remaining: 3.29s
359:	learn: 2120.6817881	total: 1.85s	remaining: 3.29s
360:	learn: 2120.4331669	total: 1.85s	remaining: 3.28s
361:	learn: 2118.9594319	total: 1.86s	remaining: 3.28s
362:	learn: 2118.1315547	total: 1.86s	remaining: 3.27s
363:	learn: 2116.6849988	total: 1.87s	remaining: 3.26s
364:	learn: 2115.7430720	total: 1.87s	remaining: 3.25s
365:	learn: 2115.4083834	total: 1.88s	remaining: 3.25s
366:	learn: 2114.4777345	total: 1.88s	remaining: 3.25s
367:	learn: 2113.1287812	total: 1.89s	remaining: 3.24s
368:	learn: 2112.0570443	total: 1.89s	remaining: 3.23s
369:	learn: 2111.4712932	total: 1.9s	remaining: 3.23s
370:	learn: 2109.7901911	total: 1.9s	remaining: 3.22s
371:	learn: 2108.9720656	total: 1.91s	remaining: 3.22s
372:	learn: 2108.0837174	total: 1.91s	remaining: 3.21s
373:	learn: 2107.4173722	total: 1.92s	remaining: 3.21s
374:	learn: 2105.9077041	total: 1.92s	remaining: 3.2s
375:	learn: 2105.0434116	total: 1.93s	remaining: 3.2s
376:	learn: 2104.1928470	total: 1.93s	remaining: 3.19s
377:	learn: 2102.8518835	total: 1.93s	remaining: 3.18s
378:	learn: 2102.3280049	total: 1.94s	remaining: 3.18s
379:	learn: 2101.5647406	total: 1.94s	remaining: 3.17s
380:	learn: 2099.8922332	total: 1.95s	remaining: 3.17s
381:	learn: 2099.3944989	total: 1.96s	remaining: 3.16s
382:	learn: 2097.8868122	total: 1.96s	remaining: 3.16s
383:	learn: 2097.1129248	total: 1.97s	remaining: 3.15s
384:	learn: 2096.5131524	total: 1.97s	remaining: 3.15s
385:	learn: 2095.6390710	total: 1.98s	remaining: 3.14s
386:	learn: 2094.9377428	total: 1.98s	remaining: 3.14s
387:	learn: 2093.8320801	total: 1.99s	remaining: 3.14s
388:	learn: 2092.9831235	total: 2s	remaining: 3.13s
389:	learn: 2092.4251574	total: 2s	remaining: 3.14s
390:	learn: 2092.0229687	total: 2.01s	remaining: 3.13s
391:	learn: 2091.0829485	total: 2.02s	remaining: 3.13s
392:	learn: 2090.3250834	total: 2.02s	remaining: 3.13s
393:	learn: 2088.1079012	total: 2.03s	remaining: 3.12s
394:	learn: 2087.8986596	total: 2.03s	remaining: 3.11s
395:	learn: 2086.9032005	total: 2.04s	remaining: 3.11s
396:	learn: 2086.4395223	total: 2.04s	remaining: 3.1s
397:	learn: 2085.7447370	total: 2.05s	remaining: 3.1s
398:	learn: 2084.8575496	total: 2.05s	remaining: 3.1s
399:	learn: 2083.7415256	total: 2.06s	remaining: 3.09s
400:	learn: 2083.0369860	total: 2.07s	remaining: 3.09s
401:	learn: 2082.6884436	total: 2.07s	remaining: 3.08s
402:	learn: 2081.7538694	total: 2.08s	remaining: 3.08s
403:	learn: 2081.2170399	total: 2.08s	remaining: 3.07s
404:	learn: 2080.6131672	total: 2.08s	remaining: 3.06s
405:	learn: 2080.2306545	total: 2.09s	remaining: 3.05s
406:	learn: 2078.8843389	total: 2.09s	remaining: 3.05s
407:	learn: 2078.3923753	total: 2.1s	remaining: 3.04s
408:	learn: 2077.1229855	total: 2.1s	remaining: 3.03s
409:	learn: 2076.8295811	total: 2.1s	remaining: 3.03s
410:	learn: 2076.3552737	total: 2.11s	remaining: 3.02s
411:	learn: 2075.3314118	total: 2.11s	remaining: 3.01s
412:	learn: 2074.3620299	total: 2.12s	remaining: 3.01s
413:	learn: 2073.7485263	total: 2.12s	remaining: 3s
414:	learn: 2073.2672776	total: 2.12s	remaining: 2.99s
415:	learn: 2072.5119144	total: 2.13s	remaining: 2.98s
416:	learn: 2071.2966911	total: 2.13s	remaining: 2.98s
417:	learn: 2070.9719841	total: 2.13s	remaining: 2.97s
418:	learn: 2069.6649051	total: 2.14s	remaining: 2.96s
419:	learn: 2069.2530742	total: 2.14s	remaining: 2.96s
420:	learn: 2067.8643085	total: 2.15s	remaining: 2.95s
421:	learn: 2067.1527675	total: 2.15s	remaining: 2.94s
422:	learn: 2066.9025605	total: 2.15s	remaining: 2.94s
423:	learn: 2065.6107786	total: 2.16s	remaining: 2.93s
424:	learn: 2064.5494343	total: 2.16s	remaining: 2.93s
425:	learn: 2064.1986889	total: 2.17s	remaining: 2.92s
426:	learn: 2063.4126644	total: 2.17s	remaining: 2.92s
427:	learn: 2062.3657353	total: 2.18s	remaining: 2.91s
428:	learn: 2061.1856541	total: 2.18s	remaining: 2.91s
429:	learn: 2060.0766706	total: 2.19s	remaining: 2.9s
430:	learn: 2059.6011787	total: 2.19s	remaining: 2.9s
431:	learn: 2058.6919478	total: 2.2s	remaining: 2.89s
432:	learn: 2058.1497193	total: 2.2s	remaining: 2.88s
433:	learn: 2057.7045388	total: 2.21s	remaining: 2.88s
434:	learn: 2056.5844582	total: 2.21s	remaining: 2.87s
435:	learn: 2056.2417563	total: 2.22s	remaining: 2.87s
436:	learn: 2055.0813455	total: 2.22s	remaining: 2.86s
437:	learn: 2053.8201515	total: 2.23s	remaining: 2.86s
438:	learn: 2053.1830142	total: 2.23s	remaining: 2.85s
439:	learn: 2052.8203145	total: 2.24s	remaining: 2.85s
440:	learn: 2052.0275189	total: 2.24s	remaining: 2.85s
441:	learn: 2050.9832104	total: 2.25s	remaining: 2.84s
442:	learn: 2050.5939400	total: 2.25s	remaining: 2.84s
443:	learn: 2049.6303754	total: 2.26s	remaining: 2.83s
444:	learn: 2049.2536153	total: 2.26s	remaining: 2.82s
445:	learn: 2048.6044518	total: 2.27s	remaining: 2.82s
446:	learn: 2047.9174875	total: 2.27s	remaining: 2.81s
447:	learn: 2047.1583329	total: 2.28s	remaining: 2.81s
448:	learn: 2046.9824522	total: 2.29s	remaining: 2.81s
449:	learn: 2046.3691624	total: 2.29s	remaining: 2.8s
450:	learn: 2045.4732471	total: 2.29s	remaining: 2.79s
451:	learn: 2044.6858476	total: 2.3s	remaining: 2.79s
452:	learn: 2043.5823245	total: 2.31s	remaining: 2.78s
453:	learn: 2042.6867647	total: 2.31s	remaining: 2.78s
454:	learn: 2042.3752872	total: 2.32s	remaining: 2.77s
455:	learn: 2041.1571101	total: 2.32s	remaining: 2.77s
456:	learn: 2039.8882417	total: 2.33s	remaining: 2.76s
457:	learn: 2038.8534851	total: 2.33s	remaining: 2.76s
458:	learn: 2037.9920866	total: 2.34s	remaining: 2.76s
459:	learn: 2037.4303184	total: 2.34s	remaining: 2.75s
460:	learn: 2036.6993182	total: 2.35s	remaining: 2.75s
461:	learn: 2035.9009810	total: 2.36s	remaining: 2.74s
462:	learn: 2035.0864239	total: 2.36s	remaining: 2.74s
463:	learn: 2034.3824600	total: 2.37s	remaining: 2.74s
464:	learn: 2034.1993500	total: 2.37s	remaining: 2.73s
465:	learn: 2033.3609059	total: 2.38s	remaining: 2.73s
466:	learn: 2032.9821501	total: 2.39s	remaining: 2.73s
467:	learn: 2032.5482135	total: 2.4s	remaining: 2.72s
468:	learn: 2032.1024379	total: 2.4s	remaining: 2.72s
469:	learn: 2031.6525294	total: 2.4s	remaining: 2.71s
470:	learn: 2031.1200697	total: 2.41s	remaining: 2.71s
471:	learn: 2030.4309641	total: 2.42s	remaining: 2.7s
472:	learn: 2029.9703506	total: 2.42s	remaining: 2.7s
473:	learn: 2029.1619608	total: 2.43s	remaining: 2.69s
474:	learn: 2028.3173172	total: 2.43s	remaining: 2.69s
475:	learn: 2027.4450816	total: 2.44s	remaining: 2.68s
476:	learn: 2026.4852248	total: 2.44s	remaining: 2.68s
477:	learn: 2025.7125620	total: 2.45s	remaining: 2.67s
478:	learn: 2024.4385398	total: 2.45s	remaining: 2.67s
479:	learn: 2023.7573534	total: 2.46s	remaining: 2.66s
480:	learn: 2022.9267016	total: 2.46s	remaining: 2.65s
481:	learn: 2022.5285079	total: 2.46s	remaining: 2.65s
482:	learn: 2021.6201313	total: 2.47s	remaining: 2.64s
483:	learn: 2020.6956057	total: 2.47s	remaining: 2.63s
484:	learn: 2019.6415235	total: 2.48s	remaining: 2.63s
485:	learn: 2019.2660694	total: 2.48s	remaining: 2.62s
486:	learn: 2018.9854237	total: 2.48s	remaining: 2.62s
487:	learn: 2017.8809698	total: 2.49s	remaining: 2.61s
488:	learn: 2016.5544473	total: 2.49s	remaining: 2.6s
489:	learn: 2016.2635297	total: 2.5s	remaining: 2.6s
490:	learn: 2015.9216242	total: 2.5s	remaining: 2.59s
491:	learn: 2015.3919735	total: 2.51s	remaining: 2.59s
492:	learn: 2014.7855567	total: 2.51s	remaining: 2.58s
493:	learn: 2014.4619330	total: 2.52s	remaining: 2.58s
494:	learn: 2013.9176550	total: 2.52s	remaining: 2.57s
495:	learn: 2013.6272291	total: 2.53s	remaining: 2.57s
496:	learn: 2012.8434483	total: 2.53s	remaining: 2.56s
497:	learn: 2012.1226817	total: 2.54s	remaining: 2.56s
498:	learn: 2011.3481377	total: 2.54s	remaining: 2.55s
499:	learn: 2010.4012031	total: 2.55s	remaining: 2.55s
500:	learn: 2009.4223510	total: 2.55s	remaining: 2.54s
501:	learn: 2007.9642496	total: 2.56s	remaining: 2.54s
502:	learn: 2006.6061373	total: 2.56s	remaining: 2.53s
503:	learn: 2006.3496774	total: 2.57s	remaining: 2.53s
504:	learn: 2005.8526902	total: 2.57s	remaining: 2.52s
505:	learn: 2004.7208606	total: 2.58s	remaining: 2.52s
506:	learn: 2003.4356822	total: 2.58s	remaining: 2.51s
507:	learn: 2002.9549950	total: 2.59s	remaining: 2.51s
508:	learn: 2002.2660866	total: 2.6s	remaining: 2.5s
509:	learn: 2001.7655635	total: 2.6s	remaining: 2.5s
510:	learn: 2001.4857925	total: 2.61s	remaining: 2.49s
511:	learn: 2000.8821102	total: 2.61s	remaining: 2.49s
512:	learn: 2000.1964240	total: 2.62s	remaining: 2.49s
513:	learn: 1999.7786055	total: 2.62s	remaining: 2.48s
514:	learn: 1998.5400249	total: 2.63s	remaining: 2.48s
515:	learn: 1998.0114463	total: 2.63s	remaining: 2.47s
516:	learn: 1996.9762909	total: 2.64s	remaining: 2.47s
517:	learn: 1995.6335126	total: 2.64s	remaining: 2.46s
518:	learn: 1995.3818569	total: 2.65s	remaining: 2.46s
519:	learn: 1994.6836261	total: 2.65s	remaining: 2.45s
520:	learn: 1993.7077334	total: 2.66s	remaining: 2.44s
521:	learn: 1993.4826742	total: 2.67s	remaining: 2.44s
522:	learn: 1992.4700473	total: 2.67s	remaining: 2.44s
523:	learn: 1991.3567304	total: 2.67s	remaining: 2.43s
524:	learn: 1990.6762970	total: 2.68s	remaining: 2.43s
525:	learn: 1989.5606652	total: 2.69s	remaining: 2.42s
526:	learn: 1988.7483715	total: 2.69s	remaining: 2.41s
527:	learn: 1987.7319439	total: 2.7s	remaining: 2.41s
528:	learn: 1987.4859471	total: 2.7s	remaining: 2.4s
529:	learn: 1986.5776344	total: 2.71s	remaining: 2.4s
530:	learn: 1985.8532232	total: 2.71s	remaining: 2.4s
531:	learn: 1985.2637417	total: 2.72s	remaining: 2.39s
532:	learn: 1984.6465808	total: 2.72s	remaining: 2.38s
533:	learn: 1984.0405309	total: 2.73s	remaining: 2.38s
534:	learn: 1983.5475987	total: 2.73s	remaining: 2.37s
535:	learn: 1982.5973042	total: 2.73s	remaining: 2.37s
536:	learn: 1981.9642603	total: 2.74s	remaining: 2.36s
537:	learn: 1981.1852376	total: 2.75s	remaining: 2.36s
538:	learn: 1980.4182747	total: 2.75s	remaining: 2.35s
539:	learn: 1978.7333318	total: 2.76s	remaining: 2.35s
540:	learn: 1977.6620993	total: 2.76s	remaining: 2.34s
541:	learn: 1976.1489712	total: 2.77s	remaining: 2.34s
542:	learn: 1976.0680284	total: 2.77s	remaining: 2.33s
543:	learn: 1975.6461332	total: 2.78s	remaining: 2.33s
544:	learn: 1974.9214418	total: 2.78s	remaining: 2.32s
545:	learn: 1973.9625504	total: 2.79s	remaining: 2.32s
546:	learn: 1973.3496809	total: 2.79s	remaining: 2.31s
547:	learn: 1973.1396807	total: 2.8s	remaining: 2.31s
548:	learn: 1972.5971978	total: 2.8s	remaining: 2.3s
549:	learn: 1971.8903485	total: 2.81s	remaining: 2.3s
550:	learn: 1970.7768482	total: 2.81s	remaining: 2.29s
551:	learn: 1970.1405185	total: 2.82s	remaining: 2.29s
552:	learn: 1969.7304828	total: 2.83s	remaining: 2.29s
553:	learn: 1969.3697204	total: 2.84s	remaining: 2.28s
554:	learn: 1968.8312174	total: 2.84s	remaining: 2.28s
555:	learn: 1967.7808421	total: 2.85s	remaining: 2.27s
556:	learn: 1967.4342981	total: 2.85s	remaining: 2.27s
557:	learn: 1966.9093022	total: 2.86s	remaining: 2.26s
558:	learn: 1966.4605710	total: 2.86s	remaining: 2.26s
559:	learn: 1966.2404654	total: 2.87s	remaining: 2.25s
560:	learn: 1966.0851063	total: 2.87s	remaining: 2.25s
561:	learn: 1965.0358799	total: 2.88s	remaining: 2.24s
562:	learn: 1964.3943368	total: 2.88s	remaining: 2.24s
563:	learn: 1964.1858422	total: 2.89s	remaining: 2.23s
564:	learn: 1963.5435064	total: 2.89s	remaining: 2.23s
565:	learn: 1962.6070521	total: 2.9s	remaining: 2.22s
566:	learn: 1962.0599152	total: 2.9s	remaining: 2.22s
567:	learn: 1961.5265994	total: 2.91s	remaining: 2.21s
568:	learn: 1961.1122149	total: 2.91s	remaining: 2.21s
569:	learn: 1960.8238389	total: 2.92s	remaining: 2.2s
570:	learn: 1960.3653494	total: 2.92s	remaining: 2.2s
571:	learn: 1960.0073179	total: 2.93s	remaining: 2.19s
572:	learn: 1959.5008133	total: 2.94s	remaining: 2.19s
573:	learn: 1959.2899951	total: 2.94s	remaining: 2.18s
574:	learn: 1958.6013168	total: 2.94s	remaining: 2.17s
575:	learn: 1957.6812709	total: 2.95s	remaining: 2.17s
576:	learn: 1957.4891319	total: 2.95s	remaining: 2.17s
577:	learn: 1957.2146640	total: 2.96s	remaining: 2.16s
578:	learn: 1956.7715926	total: 2.97s	remaining: 2.16s
579:	learn: 1956.0808357	total: 2.97s	remaining: 2.15s
580:	learn: 1955.3695098	total: 2.98s	remaining: 2.15s
581:	learn: 1955.2132138	total: 2.99s	remaining: 2.14s
582:	learn: 1954.8613484	total: 2.99s	remaining: 2.14s
583:	learn: 1954.4069408	total: 3s	remaining: 2.13s
584:	learn: 1953.8397475	total: 3s	remaining: 2.13s
585:	learn: 1953.1768933	total: 3s	remaining: 2.12s
586:	learn: 1952.4782126	total: 3.01s	remaining: 2.12s
587:	learn: 1952.0286119	total: 3.01s	remaining: 2.11s
588:	learn: 1950.9369038	total: 3.02s	remaining: 2.11s
589:	learn: 1950.4146139	total: 3.02s	remaining: 2.1s
590:	learn: 1950.1282394	total: 3.03s	remaining: 2.1s
591:	learn: 1949.7933365	total: 3.03s	remaining: 2.09s
592:	learn: 1949.0080405	total: 3.04s	remaining: 2.08s
593:	learn: 1948.2223420	total: 3.04s	remaining: 2.08s
594:	learn: 1947.9928856	total: 3.05s	remaining: 2.07s
595:	learn: 1947.2646529	total: 3.05s	remaining: 2.07s
596:	learn: 1946.8385769	total: 3.06s	remaining: 2.06s
597:	learn: 1946.3798361	total: 3.06s	remaining: 2.06s
598:	learn: 1945.9209756	total: 3.06s	remaining: 2.05s
599:	learn: 1945.4277347	total: 3.07s	remaining: 2.04s
600:	learn: 1944.8442789	total: 3.07s	remaining: 2.04s
601:	learn: 1944.6315215	total: 3.07s	remaining: 2.03s
602:	learn: 1944.1470225	total: 3.08s	remaining: 2.03s
603:	learn: 1943.4117957	total: 3.08s	remaining: 2.02s
604:	learn: 1942.7604691	total: 3.09s	remaining: 2.01s
605:	learn: 1941.9747941	total: 3.09s	remaining: 2.01s
606:	learn: 1941.1599281	total: 3.09s	remaining: 2s
607:	learn: 1940.7563392	total: 3.1s	remaining: 2s
608:	learn: 1940.4153862	total: 3.1s	remaining: 1.99s
609:	learn: 1939.6180834	total: 3.1s	remaining: 1.99s
610:	learn: 1939.0908116	total: 3.11s	remaining: 1.98s
611:	learn: 1938.5568661	total: 3.11s	remaining: 1.97s
612:	learn: 1938.1051539	total: 3.12s	remaining: 1.97s
613:	learn: 1937.2394544	total: 3.12s	remaining: 1.96s
614:	learn: 1936.8663732	total: 3.13s	remaining: 1.96s
615:	learn: 1936.4281529	total: 3.13s	remaining: 1.95s
616:	learn: 1936.0937120	total: 3.13s	remaining: 1.95s
617:	learn: 1935.7719810	total: 3.14s	remaining: 1.94s
618:	learn: 1935.2913645	total: 3.14s	remaining: 1.93s
619:	learn: 1934.4904903	total: 3.15s	remaining: 1.93s
620:	learn: 1934.1800849	total: 3.15s	remaining: 1.92s
621:	learn: 1933.4538768	total: 3.15s	remaining: 1.92s
622:	learn: 1933.0993932	total: 3.16s	remaining: 1.91s
623:	learn: 1932.6865162	total: 3.16s	remaining: 1.91s
624:	learn: 1931.9273461	total: 3.17s	remaining: 1.9s
625:	learn: 1931.2822183	total: 3.17s	remaining: 1.9s
626:	learn: 1930.4819312	total: 3.18s	remaining: 1.89s
627:	learn: 1929.8859706	total: 3.18s	remaining: 1.89s
628:	learn: 1929.5041891	total: 3.19s	remaining: 1.88s
629:	learn: 1929.3558970	total: 3.19s	remaining: 1.87s
630:	learn: 1929.1645494	total: 3.19s	remaining: 1.87s
631:	learn: 1928.9195363	total: 3.2s	remaining: 1.86s
632:	learn: 1928.2221433	total: 3.2s	remaining: 1.86s
633:	learn: 1927.9877195	total: 3.21s	remaining: 1.85s
634:	learn: 1927.2350665	total: 3.21s	remaining: 1.84s
635:	learn: 1927.0566618	total: 3.22s	remaining: 1.84s
636:	learn: 1926.4536636	total: 3.22s	remaining: 1.83s
637:	learn: 1925.9713985	total: 3.23s	remaining: 1.83s
638:	learn: 1925.0475491	total: 3.23s	remaining: 1.83s
639:	learn: 1924.7392229	total: 3.24s	remaining: 1.82s
640:	learn: 1924.2557266	total: 3.24s	remaining: 1.81s
641:	learn: 1923.6991232	total: 3.25s	remaining: 1.81s
642:	learn: 1923.1585026	total: 3.25s	remaining: 1.8s
643:	learn: 1922.2830460	total: 3.25s	remaining: 1.8s
644:	learn: 1921.6582178	total: 3.26s	remaining: 1.79s
645:	learn: 1921.4140145	total: 3.26s	remaining: 1.79s
646:	learn: 1920.7957001	total: 3.27s	remaining: 1.78s
647:	learn: 1920.4441658	total: 3.27s	remaining: 1.78s
648:	learn: 1919.3724824	total: 3.27s	remaining: 1.77s
649:	learn: 1919.1538698	total: 3.28s	remaining: 1.76s
650:	learn: 1918.5537974	total: 3.28s	remaining: 1.76s
651:	learn: 1917.9168811	total: 3.29s	remaining: 1.75s
652:	learn: 1917.4812191	total: 3.29s	remaining: 1.75s
653:	learn: 1916.8367083	total: 3.29s	remaining: 1.74s
654:	learn: 1916.2671496	total: 3.3s	remaining: 1.74s
655:	learn: 1916.0028790	total: 3.3s	remaining: 1.73s
656:	learn: 1915.4531962	total: 3.3s	remaining: 1.73s
657:	learn: 1914.7265362	total: 3.31s	remaining: 1.72s
658:	learn: 1914.2940974	total: 3.31s	remaining: 1.71s
659:	learn: 1913.9447105	total: 3.31s	remaining: 1.71s
660:	learn: 1913.2531448	total: 3.32s	remaining: 1.7s
661:	learn: 1912.5262362	total: 3.32s	remaining: 1.7s
662:	learn: 1911.8972712	total: 3.33s	remaining: 1.69s
663:	learn: 1911.3302694	total: 3.33s	remaining: 1.69s
664:	learn: 1909.9768217	total: 3.34s	remaining: 1.68s
665:	learn: 1909.3816338	total: 3.34s	remaining: 1.68s
666:	learn: 1909.1329520	total: 3.35s	remaining: 1.67s
667:	learn: 1908.7373833	total: 3.35s	remaining: 1.66s
668:	learn: 1908.1893120	total: 3.35s	remaining: 1.66s
669:	learn: 1907.6071535	total: 3.36s	remaining: 1.65s
670:	learn: 1907.2083059	total: 3.36s	remaining: 1.65s
671:	learn: 1906.7641559	total: 3.37s	remaining: 1.64s
672:	learn: 1906.3853849	total: 3.37s	remaining: 1.64s
673:	learn: 1905.6993526	total: 3.38s	remaining: 1.63s
674:	learn: 1904.7655996	total: 3.38s	remaining: 1.63s
675:	learn: 1904.6211307	total: 3.39s	remaining: 1.62s
676:	learn: 1904.3015767	total: 3.39s	remaining: 1.62s
677:	learn: 1903.8347608	total: 3.4s	remaining: 1.61s
678:	learn: 1903.0930039	total: 3.4s	remaining: 1.61s
679:	learn: 1902.6803103	total: 3.41s	remaining: 1.6s
680:	learn: 1902.4831595	total: 3.41s	remaining: 1.6s
681:	learn: 1902.0651061	total: 3.42s	remaining: 1.59s
682:	learn: 1901.8939651	total: 3.42s	remaining: 1.59s
683:	learn: 1901.5747207	total: 3.43s	remaining: 1.58s
684:	learn: 1901.4702575	total: 3.43s	remaining: 1.58s
685:	learn: 1901.2960615	total: 3.44s	remaining: 1.57s
686:	learn: 1900.9686744	total: 3.44s	remaining: 1.57s
687:	learn: 1900.1783409	total: 3.44s	remaining: 1.56s
688:	learn: 1898.9674669	total: 3.45s	remaining: 1.56s
689:	learn: 1898.7505151	total: 3.46s	remaining: 1.55s
690:	learn: 1898.0828143	total: 3.46s	remaining: 1.55s
691:	learn: 1897.8864721	total: 3.46s	remaining: 1.54s
692:	learn: 1897.6494806	total: 3.47s	remaining: 1.54s
693:	learn: 1897.2676098	total: 3.47s	remaining: 1.53s
694:	learn: 1896.9141425	total: 3.48s	remaining: 1.52s
695:	learn: 1896.4950852	total: 3.48s	remaining: 1.52s
696:	learn: 1896.1975518	total: 3.48s	remaining: 1.51s
697:	learn: 1895.7470704	total: 3.49s	remaining: 1.51s
698:	learn: 1895.1638165	total: 3.49s	remaining: 1.5s
699:	learn: 1894.3945945	total: 3.5s	remaining: 1.5s
700:	learn: 1893.8565117	total: 3.5s	remaining: 1.49s
701:	learn: 1893.2077595	total: 3.5s	remaining: 1.49s
702:	learn: 1892.9506973	total: 3.51s	remaining: 1.48s
703:	learn: 1892.0310965	total: 3.51s	remaining: 1.48s
704:	learn: 1891.6653710	total: 3.52s	remaining: 1.47s
705:	learn: 1891.2893392	total: 3.52s	remaining: 1.47s
706:	learn: 1890.8579018	total: 3.52s	remaining: 1.46s
707:	learn: 1890.4723481	total: 3.53s	remaining: 1.46s
708:	learn: 1889.4352253	total: 3.53s	remaining: 1.45s
709:	learn: 1888.9559607	total: 3.54s	remaining: 1.44s
710:	learn: 1888.4409118	total: 3.54s	remaining: 1.44s
711:	learn: 1887.9676716	total: 3.54s	remaining: 1.43s
712:	learn: 1887.7094590	total: 3.55s	remaining: 1.43s
713:	learn: 1887.3936179	total: 3.55s	remaining: 1.42s
714:	learn: 1887.0250097	total: 3.56s	remaining: 1.42s
715:	learn: 1885.9298356	total: 3.56s	remaining: 1.41s
716:	learn: 1885.6146034	total: 3.56s	remaining: 1.41s
717:	learn: 1885.3138020	total: 3.57s	remaining: 1.4s
718:	learn: 1884.7486559	total: 3.57s	remaining: 1.4s
719:	learn: 1884.5278681	total: 3.58s	remaining: 1.39s
720:	learn: 1884.0543104	total: 3.58s	remaining: 1.39s
721:	learn: 1883.7689942	total: 3.59s	remaining: 1.38s
722:	learn: 1883.2359678	total: 3.59s	remaining: 1.38s
723:	learn: 1882.7900670	total: 3.6s	remaining: 1.37s
724:	learn: 1882.0417508	total: 3.6s	remaining: 1.36s
725:	learn: 1881.6338492	total: 3.6s	remaining: 1.36s
726:	learn: 1881.0474060	total: 3.61s	remaining: 1.35s
727:	learn: 1880.5159664	total: 3.61s	remaining: 1.35s
728:	learn: 1879.9577001	total: 3.62s	remaining: 1.34s
729:	learn: 1879.6673509	total: 3.62s	remaining: 1.34s
730:	learn: 1879.4652901	total: 3.63s	remaining: 1.33s
731:	learn: 1878.8784480	total: 3.63s	remaining: 1.33s
732:	learn: 1878.4928173	total: 3.64s	remaining: 1.32s
733:	learn: 1877.7303874	total: 3.64s	remaining: 1.32s
734:	learn: 1877.4778627	total: 3.65s	remaining: 1.31s
735:	learn: 1877.0672062	total: 3.65s	remaining: 1.31s
736:	learn: 1876.7347534	total: 3.65s	remaining: 1.3s
737:	learn: 1876.1374506	total: 3.66s	remaining: 1.3s
738:	learn: 1875.8713518	total: 3.66s	remaining: 1.29s
739:	learn: 1875.1213789	total: 3.67s	remaining: 1.29s
740:	learn: 1874.2548331	total: 3.67s	remaining: 1.28s
741:	learn: 1873.8516541	total: 3.67s	remaining: 1.28s
742:	learn: 1873.2149389	total: 3.68s	remaining: 1.27s
743:	learn: 1872.7431538	total: 3.68s	remaining: 1.27s
744:	learn: 1871.7986992	total: 3.69s	remaining: 1.26s
745:	learn: 1871.6302371	total: 3.69s	remaining: 1.26s
746:	learn: 1870.8660665	total: 3.7s	remaining: 1.25s
747:	learn: 1870.4658899	total: 3.7s	remaining: 1.25s
748:	learn: 1869.6587752	total: 3.7s	remaining: 1.24s
749:	learn: 1869.3390240	total: 3.71s	remaining: 1.24s
750:	learn: 1868.8674037	total: 3.71s	remaining: 1.23s
751:	learn: 1868.3279014	total: 3.72s	remaining: 1.23s
752:	learn: 1868.1685566	total: 3.72s	remaining: 1.22s
753:	learn: 1867.7672232	total: 3.72s	remaining: 1.22s
754:	learn: 1866.9935289	total: 3.73s	remaining: 1.21s
755:	learn: 1866.4377809	total: 3.73s	remaining: 1.2s
756:	learn: 1865.7189083	total: 3.74s	remaining: 1.2s
757:	learn: 1865.3495852	total: 3.74s	remaining: 1.19s
758:	learn: 1864.9523783	total: 3.75s	remaining: 1.19s
759:	learn: 1864.5387007	total: 3.75s	remaining: 1.18s
760:	learn: 1863.9160700	total: 3.75s	remaining: 1.18s
761:	learn: 1863.5789544	total: 3.76s	remaining: 1.17s
762:	learn: 1863.0824206	total: 3.76s	remaining: 1.17s
763:	learn: 1862.7023531	total: 3.77s	remaining: 1.16s
764:	learn: 1862.0206040	total: 3.77s	remaining: 1.16s
765:	learn: 1861.7955833	total: 3.78s	remaining: 1.15s
766:	learn: 1861.4609954	total: 3.78s	remaining: 1.15s
767:	learn: 1861.0050528	total: 3.79s	remaining: 1.14s
768:	learn: 1860.7003573	total: 3.79s	remaining: 1.14s
769:	learn: 1860.3088270	total: 3.79s	remaining: 1.13s
770:	learn: 1859.9081828	total: 3.8s	remaining: 1.13s
771:	learn: 1859.6819268	total: 3.8s	remaining: 1.12s
772:	learn: 1859.0808768	total: 3.81s	remaining: 1.12s
773:	learn: 1858.6331858	total: 3.81s	remaining: 1.11s
774:	learn: 1858.1487309	total: 3.82s	remaining: 1.11s
775:	learn: 1857.7579143	total: 3.82s	remaining: 1.1s
776:	learn: 1857.4257328	total: 3.83s	remaining: 1.1s
777:	learn: 1857.2393679	total: 3.83s	remaining: 1.09s
778:	learn: 1856.4111159	total: 3.84s	remaining: 1.09s
779:	learn: 1855.4749348	total: 3.84s	remaining: 1.08s
780:	learn: 1855.1780536	total: 3.85s	remaining: 1.08s
781:	learn: 1854.8170596	total: 3.85s	remaining: 1.07s
782:	learn: 1854.2767736	total: 3.86s	remaining: 1.07s
783:	learn: 1853.6950934	total: 3.86s	remaining: 1.06s
784:	learn: 1853.2197965	total: 3.87s	remaining: 1.06s
785:	learn: 1852.6827726	total: 3.87s	remaining: 1.05s
786:	learn: 1852.1942178	total: 3.88s	remaining: 1.05s
787:	learn: 1851.6440378	total: 3.88s	remaining: 1.04s
788:	learn: 1851.1398290	total: 3.88s	remaining: 1.04s
789:	learn: 1850.7393305	total: 3.89s	remaining: 1.03s
790:	learn: 1850.3050427	total: 3.89s	remaining: 1.03s
791:	learn: 1849.9361299	total: 3.9s	remaining: 1.02s
792:	learn: 1849.6150211	total: 3.9s	remaining: 1.02s
793:	learn: 1848.9374366	total: 3.9s	remaining: 1.01s
794:	learn: 1848.6186723	total: 3.91s	remaining: 1.01s
795:	learn: 1848.4076063	total: 3.91s	remaining: 1s
796:	learn: 1848.0898842	total: 3.92s	remaining: 998ms
797:	learn: 1847.7880502	total: 3.92s	remaining: 992ms
798:	learn: 1847.5132685	total: 3.92s	remaining: 987ms
799:	learn: 1847.3287212	total: 3.93s	remaining: 982ms
800:	learn: 1846.8019638	total: 3.93s	remaining: 977ms
801:	learn: 1846.1562363	total: 3.94s	remaining: 972ms
802:	learn: 1845.4062519	total: 3.94s	remaining: 967ms
803:	learn: 1844.9209396	total: 3.94s	remaining: 962ms
804:	learn: 1844.6565360	total: 3.95s	remaining: 956ms
805:	learn: 1843.9883993	total: 3.95s	remaining: 951ms
806:	learn: 1843.4736788	total: 3.96s	remaining: 946ms
807:	learn: 1842.9980443	total: 3.96s	remaining: 941ms
808:	learn: 1842.7250484	total: 3.96s	remaining: 936ms
809:	learn: 1841.8599437	total: 3.97s	remaining: 931ms
810:	learn: 1841.4387698	total: 3.98s	remaining: 927ms
811:	learn: 1841.0499803	total: 3.98s	remaining: 922ms
812:	learn: 1840.6389994	total: 3.98s	remaining: 917ms
813:	learn: 1840.5824346	total: 3.99s	remaining: 912ms
814:	learn: 1840.0460492	total: 3.99s	remaining: 907ms
815:	learn: 1839.7358153	total: 4s	remaining: 901ms
816:	learn: 1839.1758207	total: 4s	remaining: 896ms
817:	learn: 1838.7747853	total: 4s	remaining: 891ms
818:	learn: 1838.6663974	total: 4.01s	remaining: 886ms
819:	learn: 1838.1439184	total: 4.01s	remaining: 881ms
820:	learn: 1837.8198129	total: 4.02s	remaining: 876ms
821:	learn: 1836.9727506	total: 4.02s	remaining: 871ms
822:	learn: 1836.6343852	total: 4.03s	remaining: 866ms
823:	learn: 1836.2584488	total: 4.03s	remaining: 861ms
824:	learn: 1836.2507165	total: 4.04s	remaining: 856ms
825:	learn: 1835.8576619	total: 4.04s	remaining: 851ms
826:	learn: 1835.6178868	total: 4.05s	remaining: 847ms
827:	learn: 1835.4892071	total: 4.05s	remaining: 842ms
828:	learn: 1835.1629321	total: 4.06s	remaining: 837ms
829:	learn: 1834.9497893	total: 4.06s	remaining: 832ms
830:	learn: 1834.5549875	total: 4.06s	remaining: 827ms
831:	learn: 1834.0988549	total: 4.07s	remaining: 822ms
832:	learn: 1833.8189952	total: 4.07s	remaining: 816ms
833:	learn: 1833.3335242	total: 4.08s	remaining: 812ms
834:	learn: 1832.9733766	total: 4.08s	remaining: 807ms
835:	learn: 1832.8071890	total: 4.09s	remaining: 802ms
836:	learn: 1832.5660391	total: 4.09s	remaining: 797ms
837:	learn: 1831.9180126	total: 4.09s	remaining: 792ms
838:	learn: 1831.1675342	total: 4.1s	remaining: 787ms
839:	learn: 1830.7563210	total: 4.1s	remaining: 782ms
840:	learn: 1830.3246842	total: 4.11s	remaining: 777ms
841:	learn: 1830.0657664	total: 4.11s	remaining: 771ms
842:	learn: 1829.4229004	total: 4.12s	remaining: 767ms
843:	learn: 1828.7315034	total: 4.12s	remaining: 761ms
844:	learn: 1828.4512671	total: 4.12s	remaining: 756ms
845:	learn: 1827.8254092	total: 4.13s	remaining: 751ms
846:	learn: 1827.2232798	total: 4.13s	remaining: 747ms
847:	learn: 1826.6298672	total: 4.14s	remaining: 742ms
848:	learn: 1825.9501280	total: 4.14s	remaining: 737ms
849:	learn: 1825.7641712	total: 4.15s	remaining: 732ms
850:	learn: 1825.4470687	total: 4.15s	remaining: 727ms
851:	learn: 1825.1894602	total: 4.16s	remaining: 722ms
852:	learn: 1824.7547814	total: 4.16s	remaining: 717ms
853:	learn: 1824.1378522	total: 4.17s	remaining: 712ms
854:	learn: 1823.3788765	total: 4.17s	remaining: 708ms
855:	learn: 1823.2798198	total: 4.18s	remaining: 703ms
856:	learn: 1822.8942754	total: 4.18s	remaining: 698ms
857:	learn: 1822.6624884	total: 4.19s	remaining: 693ms
858:	learn: 1822.3470225	total: 4.19s	remaining: 688ms
859:	learn: 1822.0552062	total: 4.2s	remaining: 683ms
860:	learn: 1821.4936337	total: 4.2s	remaining: 678ms
861:	learn: 1821.3556329	total: 4.21s	remaining: 673ms
862:	learn: 1821.0457434	total: 4.21s	remaining: 668ms
863:	learn: 1820.3942371	total: 4.21s	remaining: 663ms
864:	learn: 1820.1204935	total: 4.22s	remaining: 658ms
865:	learn: 1819.9671185	total: 4.22s	remaining: 654ms
866:	learn: 1819.8749744	total: 4.23s	remaining: 649ms
867:	learn: 1819.6347894	total: 4.24s	remaining: 644ms
868:	learn: 1819.3199081	total: 4.24s	remaining: 639ms
869:	learn: 1818.9499363	total: 4.24s	remaining: 634ms
870:	learn: 1818.5044571	total: 4.25s	remaining: 629ms
871:	learn: 1817.8638811	total: 4.25s	remaining: 624ms
872:	learn: 1817.4098410	total: 4.25s	remaining: 619ms
873:	learn: 1817.0773847	total: 4.26s	remaining: 614ms
874:	learn: 1816.7364761	total: 4.26s	remaining: 609ms
875:	learn: 1816.4416540	total: 4.27s	remaining: 604ms
876:	learn: 1815.9989090	total: 4.27s	remaining: 599ms
877:	learn: 1815.8309861	total: 4.27s	remaining: 594ms
878:	learn: 1815.0542665	total: 4.28s	remaining: 589ms
879:	learn: 1814.6943249	total: 4.28s	remaining: 584ms
880:	learn: 1814.2092583	total: 4.29s	remaining: 579ms
881:	learn: 1813.8651675	total: 4.29s	remaining: 574ms
882:	learn: 1813.5007452	total: 4.29s	remaining: 569ms
883:	learn: 1813.1900335	total: 4.3s	remaining: 564ms
884:	learn: 1812.7738534	total: 4.3s	remaining: 559ms
885:	learn: 1812.4607602	total: 4.3s	remaining: 554ms
886:	learn: 1811.8562410	total: 4.31s	remaining: 549ms
887:	learn: 1811.5294675	total: 4.31s	remaining: 544ms
888:	learn: 1811.0188588	total: 4.32s	remaining: 539ms
889:	learn: 1810.7853011	total: 4.32s	remaining: 534ms
890:	learn: 1810.2405183	total: 4.32s	remaining: 529ms
891:	learn: 1810.0319371	total: 4.33s	remaining: 524ms
892:	learn: 1809.8140306	total: 4.33s	remaining: 519ms
893:	learn: 1809.2706172	total: 4.34s	remaining: 514ms
894:	learn: 1808.7636476	total: 4.34s	remaining: 510ms
895:	learn: 1808.5795658	total: 4.35s	remaining: 505ms
896:	learn: 1808.2682456	total: 4.35s	remaining: 500ms
897:	learn: 1807.6999513	total: 4.36s	remaining: 495ms
898:	learn: 1807.1011318	total: 4.36s	remaining: 490ms
899:	learn: 1806.9458012	total: 4.37s	remaining: 485ms
900:	learn: 1806.4977533	total: 4.37s	remaining: 480ms
901:	learn: 1805.7089914	total: 4.38s	remaining: 475ms
902:	learn: 1805.3878730	total: 4.38s	remaining: 470ms
903:	learn: 1804.9064780	total: 4.38s	remaining: 466ms
904:	learn: 1804.6975135	total: 4.39s	remaining: 461ms
905:	learn: 1804.2327366	total: 4.39s	remaining: 456ms
906:	learn: 1803.5734829	total: 4.39s	remaining: 451ms
907:	learn: 1803.2696864	total: 4.4s	remaining: 446ms
908:	learn: 1802.8716096	total: 4.4s	remaining: 441ms
909:	learn: 1802.3973925	total: 4.41s	remaining: 436ms
910:	learn: 1802.0958028	total: 4.41s	remaining: 431ms
911:	learn: 1801.2755515	total: 4.42s	remaining: 426ms
912:	learn: 1801.0253090	total: 4.42s	remaining: 421ms
913:	learn: 1800.8272483	total: 4.42s	remaining: 416ms
914:	learn: 1800.3851756	total: 4.43s	remaining: 411ms
915:	learn: 1799.7905522	total: 4.43s	remaining: 407ms
916:	learn: 1799.4320431	total: 4.44s	remaining: 402ms
917:	learn: 1799.2001094	total: 4.44s	remaining: 397ms
918:	learn: 1798.6310586	total: 4.45s	remaining: 392ms
919:	learn: 1797.7435449	total: 4.45s	remaining: 387ms
920:	learn: 1797.5094310	total: 4.46s	remaining: 382ms
921:	learn: 1796.9513802	total: 4.46s	remaining: 377ms
922:	learn: 1796.5800373	total: 4.46s	remaining: 373ms
923:	learn: 1796.2969722	total: 4.47s	remaining: 368ms
924:	learn: 1795.7591360	total: 4.47s	remaining: 363ms
925:	learn: 1795.2822373	total: 4.48s	remaining: 358ms
926:	learn: 1794.8762874	total: 4.48s	remaining: 353ms
927:	learn: 1794.5507209	total: 4.49s	remaining: 348ms
928:	learn: 1794.2487088	total: 4.49s	remaining: 343ms
929:	learn: 1793.8825939	total: 4.5s	remaining: 338ms
930:	learn: 1793.1689341	total: 4.5s	remaining: 333ms
931:	learn: 1792.8099203	total: 4.5s	remaining: 329ms
932:	learn: 1792.6308175	total: 4.51s	remaining: 324ms
933:	learn: 1792.3575632	total: 4.51s	remaining: 319ms
934:	learn: 1791.8779254	total: 4.52s	remaining: 314ms
935:	learn: 1791.6877046	total: 4.52s	remaining: 309ms
936:	learn: 1791.5026893	total: 4.53s	remaining: 304ms
937:	learn: 1791.4013407	total: 4.53s	remaining: 300ms
938:	learn: 1791.1824732	total: 4.54s	remaining: 295ms
939:	learn: 1790.7292647	total: 4.54s	remaining: 290ms
940:	learn: 1790.2089778	total: 4.55s	remaining: 285ms
941:	learn: 1789.6764870	total: 4.55s	remaining: 280ms
942:	learn: 1789.3655891	total: 4.56s	remaining: 276ms
943:	learn: 1788.9692773	total: 4.56s	remaining: 271ms
944:	learn: 1788.4461083	total: 4.57s	remaining: 266ms
945:	learn: 1788.1874135	total: 4.57s	remaining: 261ms
946:	learn: 1787.9922170	total: 4.57s	remaining: 256ms
947:	learn: 1787.4156234	total: 4.58s	remaining: 251ms
948:	learn: 1787.0741383	total: 4.58s	remaining: 246ms
949:	learn: 1786.6421431	total: 4.59s	remaining: 241ms
950:	learn: 1786.4084330	total: 4.59s	remaining: 237ms
951:	learn: 1786.2056633	total: 4.59s	remaining: 232ms
952:	learn: 1785.9851520	total: 4.6s	remaining: 227ms
953:	learn: 1785.7392602	total: 4.6s	remaining: 222ms
954:	learn: 1785.3149446	total: 4.61s	remaining: 217ms
955:	learn: 1785.1144115	total: 4.61s	remaining: 212ms
956:	learn: 1784.7228658	total: 4.62s	remaining: 207ms
957:	learn: 1784.5047009	total: 4.62s	remaining: 203ms
958:	learn: 1784.2428992	total: 4.63s	remaining: 198ms
959:	learn: 1783.6802043	total: 4.63s	remaining: 193ms
960:	learn: 1783.3047601	total: 4.64s	remaining: 188ms
961:	learn: 1783.0702734	total: 4.64s	remaining: 183ms
962:	learn: 1782.9234214	total: 4.65s	remaining: 179ms
963:	learn: 1782.7022124	total: 4.65s	remaining: 174ms
964:	learn: 1782.3334651	total: 4.66s	remaining: 169ms
965:	learn: 1782.0711641	total: 4.66s	remaining: 164ms
966:	learn: 1781.8447091	total: 4.67s	remaining: 159ms
967:	learn: 1781.1794599	total: 4.67s	remaining: 154ms
968:	learn: 1780.8719698	total: 4.68s	remaining: 150ms
969:	learn: 1780.4503615	total: 4.68s	remaining: 145ms
970:	learn: 1780.0814279	total: 4.68s	remaining: 140ms
971:	learn: 1779.8413046	total: 4.69s	remaining: 135ms
972:	learn: 1779.6169348	total: 4.69s	remaining: 130ms
973:	learn: 1779.4562521	total: 4.7s	remaining: 125ms
974:	learn: 1779.3037670	total: 4.7s	remaining: 121ms
975:	learn: 1778.8678870	total: 4.7s	remaining: 116ms
976:	learn: 1778.4888436	total: 4.71s	remaining: 111ms
977:	learn: 1778.3662453	total: 4.71s	remaining: 106ms
978:	learn: 1777.7930104	total: 4.72s	remaining: 101ms
979:	learn: 1777.4165604	total: 4.72s	remaining: 96.3ms
980:	learn: 1777.0851564	total: 4.72s	remaining: 91.5ms
981:	learn: 1777.0152936	total: 4.73s	remaining: 86.7ms
982:	learn: 1776.7253926	total: 4.74s	remaining: 81.9ms
983:	learn: 1776.1054952	total: 4.74s	remaining: 77.1ms
984:	learn: 1775.7898864	total: 4.74s	remaining: 72.2ms
985:	learn: 1775.4270102	total: 4.75s	remaining: 67.4ms
986:	learn: 1775.0761904	total: 4.75s	remaining: 62.6ms
987:	learn: 1774.5222134	total: 4.76s	remaining: 57.8ms
988:	learn: 1774.2816872	total: 4.76s	remaining: 53ms
989:	learn: 1773.8650257	total: 4.77s	remaining: 48.2ms
990:	learn: 1773.1775081	total: 4.77s	remaining: 43.3ms
991:	learn: 1772.8075712	total: 4.78s	remaining: 38.5ms
992:	learn: 1772.4544536	total: 4.78s	remaining: 33.7ms
993:	learn: 1772.2659066	total: 4.78s	remaining: 28.9ms
994:	learn: 1772.0462978	total: 4.79s	remaining: 24.1ms
995:	learn: 1771.2044114	total: 4.79s	remaining: 19.2ms
996:	learn: 1770.9048234	total: 4.8s	remaining: 14.4ms
997:	learn: 1770.4500877	total: 4.8s	remaining: 9.62ms
998:	learn: 1770.1346179	total: 4.8s	remaining: 4.81ms
999:	learn: 1769.8343384	total: 4.81s	remaining: 0us
<catboost.core.CatBoostRegressor object at 0x0000020428F4F890>
In [39]:
#Cat Boost Prediction 
cat_pred=cat.predict(X_test)
display (cat_pred)
array([13386.63817795, 24056.86842882, 28082.99514751, ...,
       45959.31234642, 31714.44751009,  9481.45994163])
In [40]:
#Cat Boost Accuracy
display (r2_score(Y_test,cat_pred))
0.9641612028134969
In [41]:
#Create Pickle File 
#Use pickle to save our model so that we can use it later
import pickle 
# Saving model to disk
pickle.dump(cat, open('model.pkl','wb'))
In [42]:
#Load Pickle File and  do Prediction  
model=pickle.load(open('model.pkl','rb'))
print (model.predict (X_train))
[14256.69390956 29475.32741129 11911.12888914 ... 21406.16876331
 17189.65426694 44839.56117235]
In [43]:
#EXPLORATORY DATA ANALYSIS
import pandas_profiling as pf
display(pf.ProfileReport(df))
Summarize dataset:   0%|          | 0/5 [00:00<?, ?it/s]
Generate report structure:   0%|          | 0/1 [00:00<?, ?it/s]
Render HTML:   0%|          | 0/1 [00:00<?, ?it/s]